Effect of UPSTM-Based
Decorrelation on Feature Discovery
Loading the
libraries
# Load analysis libraries.
# FRESA.CAD provides IDeA/ILAA decorrelation, heatMaps, FRESAScale,
# uniRankVar, univariate_BinEnsemble, LASSO_MIN and correlated_Remove.
library("FRESA.CAD")
library(readxl)
library(igraph)
library(umap)
library(tsne)
library(entropy)
library(psych)
library(whitening)
# str_remove_all/str_replace_all/str_detect are called below; attach stringr
# explicitly instead of relying on FRESA.CAD pulling it in transitively.
library(stringr)
library("vioplot")
library("rpart")
# Snapshot the default graphical parameters so plots can restore them later
# with par(op).
op <- par(no.readonly = TRUE)
# pander table-formatting options for the knitted report.
pander::panderOptions('digits', 3)
pander::panderOptions('table.split.table', 400)
pander::panderOptions('keep.trailing.zeros',TRUE)
Material and
Methods
Data Source https://archive.ics.uci.edu/ml/datasets/seeds
M. Charytanowicz, J. Niewczas, P. Kulczycki, P.A. Kowalski, S.
Lukasik, S. Zak, ‘A Complete Gradient Clustering Algorithm for Features
Analysis of X-ray Images’, in: Information Technologies in Biomedicine,
Ewa Pietka, Jacek Kawa (eds.), Springer-Verlag, Berlin-Heidelberg, 2010,
pp. 15-24.
The Data
# Load the UCI seeds data set (tab-delimited text, no header row).
seeds <- read.delim("~/GitHub/LatentBiomarkers/Data/seeds_dataset.txt", header=FALSE)
par(cex=0.5)
# Descriptive names for the seven kernel measurements plus the class label.
featnames <- c("area",
"perimeter",
"compactness",
"length_of_kernel",
"width_of_kernel",
"asymmetry_coeff",
"length_ker_groove",
"class"
)
colnames(seeds) <- featnames
# Binarize the three-class outcome: 1 when class == 1, 0 otherwise.
seeds$class <- 1*(seeds$class == 1)
pander::pander(table(seeds$class))
Standardize the
names for the reporting
# Study-wide settings used by the rest of the report.
studyName <- "Seeds"
dataframe <- seeds
outcome <- "class"
thro <- 0.80          # correlation threshold passed to IDeA()
TopVariables <- 5     # number of top-ranked features to report
cexheat = 0.45        # character expansion for heatmap labels
Generating the
report
Libraries
Some libraries
# NOTE(review): these libraries were already attached at the top of the
# script; the repeated calls here are harmless no-ops kept for the report
# template.
library(psych)
library(whitening)
library("vioplot")
library("rpart")
Data specs
# Report data dimensions (rows, predictor count) and the outcome table.
pander::pander(c(rows=nrow(dataframe),col=ncol(dataframe)-1))
pander::pander(table(dataframe[,outcome]))
# Candidate predictor names: every column except the outcome.
varlist <- colnames(dataframe)
varlist <- varlist[varlist != outcome]
# Flag very wide data sets; several plots below are skipped when TRUE.
largeSet <- length(varlist) > 1500
Scaling the
data
Scaling and removing near zero variance columns and highly
co-linear(r>0.99999) columns
### Some global cleaning
# Keep only columns with non-negligible standard deviation.
# NOTE(review): despite the name, sdiszero is TRUE when the sd is NOT zero.
sdiszero <- apply(dataframe,2,sd) > 1.0e-16
dataframe <- dataframe[,sdiszero]
varlist <- colnames(dataframe)[colnames(dataframe) != outcome]
# Drop near-duplicate columns (pairwise |r| > 0.99999); always keep the outcome.
tokeep <- c(as.character(correlated_Remove(dataframe,varlist,thr=0.99999)),outcome)
dataframe <- dataframe[,tokeep]
varlist <- colnames(dataframe)
varlist <- varlist[varlist != outcome]
iscontinous <- sapply(apply(dataframe,2,unique),length) >= 5 ## Only variables with enough samples
# Rank-based (ordinal logit) scaling used by the heatmap and EFA steps.
dataframeScaled <- FRESAScale(dataframe,method="OrderLogit")$scaledData
The heatmap of the
data
# Cap heatmap/UMAP rendering at 1000 samples to keep plotting tractable.
numsub <- nrow(dataframe)
if (numsub > 1000) numsub <- 1000
if (!largeSet)
{
# Sample-by-feature heatmap of the scaled data, clustered by row.
hm <- heatMaps(data=dataframeScaled[1:numsub,],
Outcome=outcome,
Scale=TRUE,
hCluster = "row",
xlab="Feature",
ylab="Sample",
srtCol=45,
srtRow=45,
cexCol=cexheat,
cexRow=cexheat
)
par(op)
}

Correlation
Matrix of the Data
The heat map of the data
if (!largeSet)
{
par(cex=0.6,cex.main=0.85,cex.axis=0.7)
#cormat <- Rfast::cora(as.matrix(dataframe[,varlist]),large=TRUE)
# Pearson correlation of the raw predictors; NAs treated as zero correlation.
cormat <- cor(dataframe[,varlist],method="pearson")
cormat[is.na(cormat)] <- 0
gplots::heatmap.2(abs(cormat),
trace = "none",
# scale = "row",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "Original Correlation",
cexRow = cexheat,
cexCol = cexheat,
srtCol=45,
srtRow=45,
key.title=NA,
key.xlab="|Pearson Correlation|",
xlab="Feature", ylab="Feature")
# Report the largest off-diagonal |r| before decorrelation.
diag(cormat) <- 0
print(max(abs(cormat)))
}
[1]
0.9943409
The
decorrelation
# Iterative decorrelation (IDeA/ILAA) of the predictors at threshold thro;
# returns the transformed frame with UPLTM/fscore/VarRatio attributes.
DEdataframe <- IDeA(dataframe,verbose=TRUE,thr=thro)
#>
#> area
#> area perimeter compactness length_of_kernel
#> 1.0000000 0.8571429 0.2857143 0.5714286
#> width_of_kernel asymmetry_coeff
#> 0.7142857 0.1428571
#>
#> Included: 7 , Uni p: 0.02142857 , Base Size: 1 , Rcrit: 0.1398922
#>
#>
#> 1 <R=0.977,thr=0.950>, Top: 1< 2 >[Fa= 1 ]( 1 , 2 , 0 ),<|><>Tot Used: 3 , Added: 2 , Zero Std: 0 , Max Cor: 0.950
#>
#> 2 <R=0.898,thr=0.900>, Top: 1< 1 >[Fa= 1 ]( 1 , 1 , 1 ),<|><>Tot Used: 4 , Added: 1 , Zero Std: 0 , Max Cor: 0.864
#>
#> 3 <R=0.849,thr=0.800>, Top: 2< 1 >[Fa= 2 ]( 2 , 3 , 1 ),<|><>Tot Used: 5 , Added: 3 , Zero Std: 0 , Max Cor: 0.712
#>
#> 4 <R=0.712,thr=0.800>
#>
#> [ 4 ], 0.5972333 Decor Dimension: 5 Nused: 5 . Cor to Base: 4 , ABase: 7 , Outcome Base: 0
#>
# Predictor names of the decorrelated frame (latent features carry "La_").
varlistc <- colnames(DEdataframe)[colnames(DEdataframe) != outcome]
# Total variance before vs. after decorrelation.
pander::pander(sum(apply(dataframe[,varlist],2,var)))
13
pander::pander(sum(apply(DEdataframe[,varlistc],2,var)))
10.8
# Empirical (256-bin) entropy of the pooled values before vs. after.
pander::pander(entropy(discretize(unlist(dataframe[,varlist]), 256)))
4.55
pander::pander(entropy(discretize(unlist(DEdataframe[,varlistc]), 256)))
3.96
# Per-feature residual variance ratio attached by IDeA.
varratio <- attr(DEdataframe,"VarRatio")
pander::pander(tail(varratio))
| 1 |
1 |
0.254 |
0.029 |
0.0206 |
0.0113 |
The decorrelation
matrix
if (!largeSet)
{
par(cex=0.6,cex.main=0.85,cex.axis=0.7)
# UPLTM: the transformation matrix produced by IDeA; plot the sparsity
# pattern (which loadings are nonzero) rather than the magnitudes.
UPLTM <- attr(DEdataframe,"UPLTM")
gplots::heatmap.2(1.0*(abs(UPLTM)>0),
trace = "none",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "Decorrelation matrix",
cexRow = cexheat,
cexCol = cexheat,
srtCol=45,
srtRow=45,
key.title=NA,
key.xlab="|Beta|>0",
xlab="Output Feature", ylab="Input Feature")
par(op)
}

Formulas
Network
Displaying the features associations
par(op)
# Placeholder shown when the feature set is too large to draw the network.
clustable <- c("Too many variables")
# Adjacency of the decorrelation transform: TRUE where a loading is nonzero.
transform <- attr(DEdataframe,"UPLTM") != 0
tnames <- colnames(transform)
colnames(transform) <- str_remove_all(colnames(transform),"La_")
transform <- abs(transform*cor(dataframe[,rownames(transform)])) # The weights are proportional to the observed correlation
fscore <- attr(DEdataframe,"fscore")
VertexSize <- fscore # The size depends on the variable independence relevance (fscore)
names(VertexSize) <- str_remove_all(names(VertexSize),"La_")
VertexSize <- 10*(VertexSize-min(VertexSize))/(max(VertexSize)-min(VertexSize)) # Normalization
VertexSize <- VertexSize[rownames(transform)]
# Rank features by connectivity, breaking ties with fscore and variance ratio.
rsum <- apply(1*(transform !=0),1,sum) + 0.01*VertexSize + 0.001*varratio[tnames]
csum <- apply(1*(transform !=0),2,sum) + 0.01*VertexSize + 0.001*varratio[tnames]
ntop <- min(10,length(rsum))
topfeatures <- unique(c(names(rsum[order(-rsum)])[1:ntop],names(csum[order(-csum)])[1:ntop]))
rtrans <- transform[topfeatures,]
# Keep columns connected to the top rows (or that are themselves top features).
csum <- (apply(1*(rtrans !=0),2,sum) > 1*(colnames(rtrans) %in% topfeatures))
rtrans <- rtrans[,csum]
topfeatures <- unique(c(topfeatures,colnames(rtrans)))
print(ncol(transform))
[1] 5
# Restrict the transform to the selected top features.
transform <- transform[topfeatures,topfeatures]
print(ncol(transform))
[1] 5
# For wide transforms (>100 columns), prune further to the most connected
# features so the heatmap and network plot stay readable.
if (ncol(transform)>100)
{
csum <- apply(1*(transform !=0),1,sum)
csum <- csum[csum > 1]
csum <- csum + 0.01*VertexSize[names(csum)]
csum <- csum[order(-csum)]
tpsum <- min(20,length(csum))
trsum <- rownames(transform)[rownames(transform) %in% names(csum[1:tpsum])]
rtrans <- transform[trsum,]
topfeatures <- unique(c(rownames(rtrans),colnames(rtrans)))
transform <- transform[topfeatures,topfeatures]
# A second pruning pass if the matrix is still larger than 150 rows.
if (nrow(transform) > 150)
{
csum <- apply(1*(rtrans != 0 ),2,sum)
csum <- csum + 0.01*VertexSize[names(csum)]
csum <- csum[order(-csum)]
tpsum <- min(130,length(csum))
csum <- rownames(transform)[rownames(transform) %in% names(csum[1:tpsum])]
csum <- unique(c(trsum,csum))
transform <- transform[csum,csum]
}
print(ncol(transform))
}
if (ncol(transform) < 150)
{
# Heatmap of the reduced, correlation-weighted decorrelation matrix.
gplots::heatmap.2(transform,
trace = "none",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "Red Decorrelation matrix",
cexRow = cexheat,
cexCol = cexheat,
srtCol=45,
srtRow=45,
key.title=NA,
key.xlab="|Beta|>0",
xlab="Output Feature", ylab="Input Feature")
par(op)
VertexSize <- VertexSize[colnames(transform)]
# Directed feature-association graph; edge weights from the transform.
gr <- graph_from_adjacency_matrix(transform,mode = "directed",diag = FALSE,weighted=TRUE)
gr$layout <- layout_with_fr
# fc <- cluster_optimal(gr)
# Community detection via random walks (walktrap, 50 steps).
fc <- cluster_walktrap (gr,steps=50)
plot(fc, gr,
edge.width = 2*E(gr)$weight,
vertex.size=VertexSize,
edge.arrow.size=0.5,
edge.arrow.width=0.5,
vertex.label.cex=(0.15+0.05*VertexSize),
vertex.label.dist=0.5 + 0.05*VertexSize,
main="Top Feature Association")
# Copies with the "La_" prefix stripped so names match graph vertices.
varratios <- varratio
fscores <- fscore
names(varratios) <- str_remove_all(names(varratios),"La_")
names(fscores) <- str_remove_all(names(fscores),"La_")
# Latent-variable formulas for each decorrelated feature.
dc <- getLatentCoefficients(DEdataframe)
theCharformulas <- attr(dc,"LatentCharFormulas")
# Table: formula, community class, residual variance and fscore per feature.
clustable <- as.data.frame(cbind(Variable=fc$names,
Formula=as.character(theCharformulas[paste("La_",fc$names,sep="")]),
Class=fc$membership,
ResidualVariance=round(varratios[fc$names],3),
Fscore=round(fscores[fc$names],3)
)
)
rownames(clustable) <- str_replace_all(rownames(clustable),"__","_")
clustable$Variable <- NULL
clustable$Class <- as.integer(clustable$Class)
clustable$ResidualVariance <- as.numeric(clustable$ResidualVariance)
clustable$Fscore <- as.numeric(clustable$Fscore)
# Order by community class, then by descending Fscore within class.
clustable <- clustable[order(-clustable$Fscore),]
clustable <- clustable[order(clustable$Class),]
clustable <- clustable[clustable$Fscore >= -1,]
# Report at most 50 rows.
topv <- min(50,nrow(clustable))
clustable <- clustable[1:topv,]
}


# Render the cluster/formula table.
pander::pander(clustable)
| area |
NA |
1 |
1.000 |
4 |
| perimeter |
- (0.446)area + perimeter |
1 |
0.011 |
1 |
| length_ker_groove |
- (0.146)area + length_ker_groove |
1 |
0.254 |
-1 |
par(op) # restore default graphical parameters
The correlation
matrix after decorrelation
if (!largeSet)
{
# Correlation structure of the decorrelated features.
cormat <- cor(DEdataframe[,varlistc],method="pearson")
cormat[is.na(cormat)] <- 0
gplots::heatmap.2(abs(cormat),
trace = "none",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "Correlation after ILAA",
cexRow = cexheat,
cexCol = cexheat,
srtCol=45,
srtRow=45,
key.title=NA,
key.xlab="|Pearson Correlation|",
xlab="Feature", ylab="Feature")
par(op)
# Largest remaining off-diagonal |r| after decorrelation.
diag(cormat) <- 0
print(max(abs(cormat)))
}
[1]
0.7116456
U-MAP Visualization
of features
The UMAP on Raw
Data
# Color palette per outcome class for labeling the UMAP scatter.
classes <- unique(dataframe[1:numsub,outcome])
raincolors <- rainbow(length(classes))
names(raincolors) <- classes
# Feature selection on raw data: univariate ensemble plus LASSO (min lambda).
topvars <- univariate_BinEnsemble(dataframe,outcome)
lso <- LASSO_MIN(formula(paste(outcome,"~.")),dataframe,family="binomial")
topvars <- unique(c(names(topvars),lso$selectedfeatures))
pander::pander(head(topvars))
asymmetry_coeff, length_ker_groove,
compactness, length_of_kernel, perimeter and
width_of_kernel
# names(topvars)
#if (nrow(dataframe) < 1000)
#{
# Two-dimensional UMAP embedding of the selected raw features.
datasetframe.umap = umap(scale(dataframe[1:numsub,topvars]),n_components=2)
# datasetframe.umap = umap(dataframe[1:numsub,varlist],n_components=2)
plot(datasetframe.umap$layout,xlab="U1",ylab="U2",main="UMAP: Original",t='n')
text(datasetframe.umap$layout,labels=dataframe[1:numsub,outcome],col=raincolors[dataframe[1:numsub,outcome]+1])

#}
The decorrelated
UMAP
# Keep decorrelated features that retain at least 1% residual variance.
varlistcV <- names(varratio[varratio >= 0.01])
# Feature selection on the decorrelated data (univariate + LASSO).
topvars <- univariate_BinEnsemble(DEdataframe[,varlistcV],outcome)
lso <- LASSO_MIN(formula(paste(outcome,"~.")),DEdataframe[,varlistcV],family="binomial")
topvars <- unique(c(names(topvars),lso$selectedfeatures))
pander::pander(head(topvars))
La_length_ker_groove, asymmetry_coeff,
La_width_of_kernel, compactness, La_perimeter
and La_length_of_kernel
varlistcV <- varlistcV[varlistcV != outcome]
# DEdataframe[,outcome] <- as.numeric(DEdataframe[,outcome])
#if (nrow(dataframe) < 1000)
#{
# UMAP embedding of the selected decorrelated features.
datasetframe.umap = umap(scale(DEdataframe[1:numsub,topvars]),n_components=2)
# datasetframe.umap = umap(DEdataframe[1:numsub,varlistcV],n_components=2)
plot(datasetframe.umap$layout,xlab="U1",ylab="U2",main="UMAP: After ILAA",t='n')
text(datasetframe.umap$layout,labels=DEdataframe[1:numsub,outcome],col=raincolors[DEdataframe[1:numsub,outcome]+1])

#}
Univariate
Analysis
Univariate
# Univariate AUC ranking of the raw features against the outcome.
univarRAW <- uniRankVar(varlist,
paste(outcome,"~1"),
outcome,
dataframe,
rankingTest="AUC")
# Same ranking on the decorrelated features.
# (fixed: the original call had a trailing comma, which passes an extra
# empty argument to uniRankVar)
univarDe <- uniRankVar(varlistc,
paste(outcome,"~1"),
outcome,
DEdataframe,
rankingTest="AUC")
Final Table
# Columns reported in the univariate summary tables.
univariate_columns <- c("caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC")
##top variables
# Logical mask selecting the first TopVariables rows of the ranked frame.
topvar <- c(1:length(varlist)) <= TopVariables
tableRaw <- univarRAW$orderframe[topvar,univariate_columns]
pander::pander(tableRaw)
| asymmetry_coeff |
2.67 |
1.1739 |
4.217 |
1.3818 |
9.80e-01 |
0.810 |
| length_ker_groove |
5.09 |
0.2637 |
5.569 |
0.5009 |
1.98e-03 |
0.764 |
| compactness |
0.88 |
0.0162 |
0.866 |
0.0254 |
5.69e-01 |
0.653 |
| length_of_kernel |
5.51 |
0.2315 |
5.689 |
0.5075 |
3.68e-04 |
0.562 |
| perimeter |
14.29 |
0.5766 |
14.692 |
1.5318 |
2.01e-05 |
0.524 |
# Top decorrelated table: the best-ranked features plus the leading latent
# ("La_") features, limited to TopVariables/2 extra latent entries.
topLAvar <- univarDe$orderframe$Name[str_detect(univarDe$orderframe$Name,"La_")]
topLAvar <- unique(c(univarDe$orderframe$Name[topvar],topLAvar[1:as.integer(TopVariables/2)]))
finalTable <- univarDe$orderframe[topLAvar,univariate_columns]
pander::pander(finalTable)
| La_length_ker_groove |
3.00 |
0.1832 |
3.365 |
0.1729 |
0.849 |
0.927 |
| asymmetry_coeff |
2.67 |
1.1739 |
4.217 |
1.3818 |
0.980 |
0.810 |
| La_width_of_kernel |
5.58 |
0.0483 |
5.529 |
0.0499 |
0.828 |
0.753 |
| compactness |
0.88 |
0.0162 |
0.866 |
0.0254 |
0.569 |
0.653 |
| La_length_of_kernel |
-3.17 |
0.0582 |
-3.144 |
0.0817 |
0.601 |
0.610 |
# Latent coefficients and per-feature scores from the decorrelated frame.
dc <- getLatentCoefficients(DEdataframe)
fscores <- attr(DEdataframe,"fscore")
# Mean formula length, number of latent features, and their fraction of
# all predictors.
pander::pander(c(mean=mean(sapply(dc,length)),total=length(dc),fraction=length(dc)/(ncol(dataframe)-1)))
theCharformulas <- attr(dc,"LatentCharFormulas")
# Append raw top features not already present in the latent table.
topvar <- rownames(tableRaw)
finalTable <- rbind(finalTable,tableRaw[topvar[!(topvar %in% topLAvar)],univariate_columns])
orgnamez <- rownames(finalTable)
orgnamez <- str_remove_all(orgnamez,"La_")
# Pair each row's AUC with the corresponding raw-feature AUC for comparison.
finalTable$RAWAUC <- univarRAW$orderframe[orgnamez,"ROCAUC"]
finalTable$DecorFormula <- theCharformulas[rownames(finalTable)]
finalTable$fscores <- fscores[rownames(finalTable)]
finalTable$varratio <- varratio[rownames(finalTable)]
Final_Columns <- c("DecorFormula","caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC","RAWAUC","fscores","varratio")
finalTable <- finalTable[order(-finalTable$ROCAUC),]
pander::pander(finalTable[,Final_Columns])
| La_length_ker_groove |
- (0.146)area + length_ker_groove |
3.00 |
0.1832 |
3.365 |
0.1729 |
8.49e-01 |
0.927 |
0.764 |
-1 |
0.2540 |
| asymmetry_coeff |
NA |
2.67 |
1.1739 |
4.217 |
1.3818 |
9.80e-01 |
0.810 |
0.810 |
0 |
1.0000 |
| length_ker_groove |
NA |
5.09 |
0.2637 |
5.569 |
0.5009 |
1.98e-03 |
0.764 |
0.764 |
NA |
NA |
| La_width_of_kernel |
- (0.360)area + (0.524)perimeter +
width_of_kernel |
5.58 |
0.0483 |
5.529 |
0.0499 |
8.28e-01 |
0.753 |
0.501 |
-2 |
0.0206 |
| compactness |
NA |
0.88 |
0.0162 |
0.866 |
0.0254 |
5.69e-01 |
0.653 |
0.653 |
0 |
1.0000 |
| La_length_of_kernel |
+ (0.228)area - (0.836)perimeter +
length_of_kernel |
-3.17 |
0.0582 |
-3.144 |
0.0817 |
6.01e-01 |
0.610 |
0.562 |
-2 |
0.0290 |
| length_of_kernel |
NA |
5.51 |
0.2315 |
5.689 |
0.5075 |
3.68e-04 |
0.562 |
0.562 |
NA |
NA |
| perimeter |
NA |
14.29 |
0.5766 |
14.692 |
1.5318 |
2.01e-05 |
0.524 |
0.524 |
NA |
NA |
Comparing ILAA vs
PCA vs EFA
PCA
featuresnames <- colnames(dataframe)[colnames(dataframe) != outcome]
# PCA on the continuous predictors (centered/scaled; tol drops tiny components).
pc <- prcomp(dataframe[,iscontinous],center = TRUE,scale. = TRUE,tol=0.01) #principal components
predPCA <- predict(pc,dataframe[,iscontinous])
# Recombine the PCA scores with the non-continuous columns (incl. the outcome).
PCAdataframe <- as.data.frame(cbind(predPCA,dataframe[,!iscontinous]))
colnames(PCAdataframe) <- c(colnames(predPCA),colnames(dataframe)[!iscontinous])
#plot(PCAdataframe[,colnames(PCAdataframe)!=outcome],col=dataframe[,outcome],cex=0.65,cex.lab=0.5,cex.axis=0.75,cex.sub=0.5,cex.main=0.75)
#pander::pander(pc$rotation)
# Residual correlation among the PCA-transformed features.
PCACor <- cor(PCAdataframe[,colnames(PCAdataframe) != outcome])
gplots::heatmap.2(abs(PCACor),
trace = "none",
# scale = "row",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "PCA Correlation",
cexRow = 0.5,
cexCol = 0.5,
srtCol=45,
srtRow= -45,
key.title=NA,
key.xlab="Pearson Correlation",
xlab="Feature", ylab="Feature")

EFA
EFAdataframe <- dataframeScaled
if (length(iscontinous) < 2000)
{
# Number of factors: bounded by the continuous-feature count, the sample
# size, and one less than the retained PCA components; at least 2.
topred <- min(length(iscontinous),nrow(dataframeScaled),ncol(predPCA)-1)
if (topred < 2) topred <- 2
uls <- fa(dataframeScaled[,iscontinous],nfactors=topred,rotate="varimax",warnings=FALSE) # EFA analysis
predEFA <- predict(uls,dataframeScaled[,iscontinous])
# Recombine factor scores with the non-continuous columns (incl. outcome).
EFAdataframe <- as.data.frame(cbind(predEFA,dataframeScaled[,!iscontinous]))
colnames(EFAdataframe) <- c(colnames(predEFA),colnames(dataframeScaled)[!iscontinous])
EFACor <- cor(EFAdataframe[,colnames(EFAdataframe) != outcome])
gplots::heatmap.2(abs(EFACor),
trace = "none",
# scale = "row",
mar = c(5,5),
col=rev(heat.colors(5)),
main = "EFA Correlation",
cexRow = 0.5,
cexCol = 0.5,
srtCol=45,
srtRow= -45,
key.title=NA,
key.xlab="Pearson Correlation",
xlab="Feature", ylab="Feature")
}

Effect on CART
modeling
par(op)
par(xpd = TRUE)
# CART (rpart, depth <= 3) on the raw features; resubstitution predictions.
dataframe[,outcome] <- factor(dataframe[,outcome])
rawmodel <- rpart(paste(outcome,"~."),dataframe,control=rpart.control(maxdepth=3))
pr <- predict(rawmodel,dataframe,type = "class")
# Placeholder result in case the tree predicts a single class.
ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
if (length(unique(pr))>1)
{
plot(rawmodel,main="Raw",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
text(rawmodel, use.n = TRUE,cex=0.75)
# Diagnostic-test statistics from the 2x2 prediction-vs-truth table.
ptab <- epiR::epi.tests(table(pr==0,dataframe[,outcome]==0))
}

pander::pander(table(dataframe[,outcome],pr))
pander::pander(ptab$detail[c(5,3,4,6),])
| 5 |
diag.ac |
0.871 |
0.818 |
0.914 |
| 3 |
se |
0.700 |
0.579 |
0.804 |
| 4 |
sp |
0.957 |
0.909 |
0.984 |
| 6 |
diag.or |
52.111 |
19.864 |
136.710 |
par(op)
par(xpd = TRUE)
# Same CART evaluation on the ILAA/IDeA-decorrelated features.
DEdataframe[,outcome] <- factor(DEdataframe[,outcome])
IDeAmodel <- rpart(paste(outcome,"~."),DEdataframe[,c(outcome,varlistcV)],control=rpart.control(maxdepth=3))
pr <- predict(IDeAmodel,DEdataframe,type = "class")
# Placeholder result in case the tree predicts a single class.
ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
if (length(unique(pr))>1)
{
plot(IDeAmodel,main="ILAA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
text(IDeAmodel, use.n = TRUE,cex=0.75)
ptab <- epiR::epi.tests(table(pr==0,DEdataframe[,outcome]==0))
}

pander::pander(table(DEdataframe[,outcome],pr))
pander::pander(ptab$detail[c(5,3,4,6),])
| 5 |
diag.ac |
0.919 |
0.874 |
0.952 |
| 3 |
se |
0.900 |
0.805 |
0.959 |
| 4 |
sp |
0.929 |
0.873 |
0.965 |
| 6 |
diag.or |
117.000 |
42.543 |
321.768 |
par(op)
par(xpd = TRUE)
# Same CART evaluation on the PCA scores.
PCAdataframe[,outcome] <- factor(PCAdataframe[,outcome])
PCAmodel <- rpart(paste(outcome,"~."),PCAdataframe,control=rpart.control(maxdepth=3))
pr <- predict(PCAmodel,PCAdataframe,type = "class")
# Placeholder result in case the tree predicts a single class.
ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
if (length(unique(pr))>1)
{
plot(PCAmodel,main="PCA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
text(PCAmodel, use.n = TRUE,cex=0.75)
ptab <- epiR::epi.tests(table(pr==0,PCAdataframe[,outcome]==0))
}

pander::pander(table(PCAdataframe[,outcome],pr))
pander::pander(ptab$detail[c(5,3,4,6),])
| 5 |
diag.ac |
0.905 |
0.857 |
0.941 |
| 3 |
se |
0.843 |
0.736 |
0.919 |
| 4 |
sp |
0.936 |
0.881 |
0.970 |
| 6 |
diag.or |
78.071 |
30.711 |
198.465 |
par(op)
EFA
# Same CART evaluation on the EFA factor scores.
EFAdataframe[,outcome] <- factor(EFAdataframe[,outcome])
EFAmodel <- rpart(paste(outcome,"~."),EFAdataframe,control=rpart.control(maxdepth=3))
pr <- predict(EFAmodel,EFAdataframe,type = "class")
# Placeholder result in case the tree predicts a single class.
ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
if (length(unique(pr))>1)
{
plot(EFAmodel,main="EFA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
text(EFAmodel, use.n = TRUE,cex=0.75)
ptab <- epiR::epi.tests(table(pr==0,EFAdataframe[,outcome]==0))
}

pander::pander(table(EFAdataframe[,outcome],pr))
pander::pander(ptab$detail[c(5,3,4,6),])
| 5 |
diag.ac |
0.943 |
0.902 |
0.970 |
| 3 |
se |
0.843 |
0.736 |
0.919 |
| 4 |
sp |
0.993 |
0.961 |
1.000 |
| 6 |
diag.or |
745.545 |
94.111 |
5906.216 |
par(op) # restore default graphical parameters